use rustc_serialize::{Encoder,Encodable};
use core::source::Source;
-/// Informations about a package that is available somewhere in the file system.
+/// Information about a package that is available somewhere in the file system.
///
-/// A package is a `Cargo.toml` file, plus all the files that are part of it.
+/// A package is a `Cargo.toml` file plus all the files that are part of it.
// TODO: Is manifest_path a relic?
#[derive(Clone, Debug)]
pub struct Package {
use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package};
use util::{CargoResult, ChainError, Config, human, profile};
-/// Source of informations about a group of packages.
+/// Source of information about a group of packages.
///
/// See also `core::Source`.
pub trait Registry {
///
/// The resolution phase of Cargo uses this to drive knowledge about new
/// packages as well as querying for lists of new packages. It is here that
-/// sources are updated and (e.g. network operations) as well as overrides are
+/// sources are updated (e.g. network operations) and overrides are
/// handled.
///
/// The general idea behind this registry is that it is centered around the
-/// `SourceMap` structure contained within which is a mapping of a `SourceId` to
+/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to
/// a `Source`. Each `Source` in the map has been updated (using network
/// operations if necessary) and is ready to be queried for packages.
pub struct PackageRegistry<'cfg> {
// possible. This is where the concept of a lockfile comes into play.
//
// If a summary points at a package id which was previously locked, then we
- // override the summary's id itself as well as all dependencies to be
+ // override the summary's id itself, as well as all dependencies, to be
// rewritten to the locked versions. This will transform the summary's
// source to a precise source (listed in the locked version) as well as
// transforming all of the dependencies from range requirements on imprecise
// one of a few cases can arise:
//
// 1. We have a lock entry for this dependency from the same
- // source as its listed as coming from. In this case we make
+ // source as it's listed as coming from. In this case we make
// sure to lock to precisely the given package id.
//
// 2. We have a lock entry for this dependency, but it's from a
let root = try!(src.root_package());
let target_dir = opts.config.target_dir(&root);
- // If we have a spec, then we need to delete some package,s otherwise, just
+ // If we have a spec, then we need to delete some packages; otherwise, just
// remove the whole target directory and be done with it!
let spec = match opts.spec {
Some(spec) => spec,
use util::config::{ConfigValue, Config};
use util::{CargoResult, internal, human, ChainError, profile};
-/// Contains informations about how a package should be compiled.
+/// Contains information about how a package should be compiled.
pub struct CompileOptions<'a> {
pub config: &'a Config,
/// Number of concurrent jobs to use.
// implicitly converted to registry-based dependencies, so we rewrite those
// dependencies here.
//
- // We also be sure to point all paths at `dst` instead of the previous
- // location that the package was original read from. In locking the
+ // We also make sure to point all paths at `dst` instead of the previous
+ // location that the package was originally read from. In locking the
// `SourceId` we're telling it that the corresponding `PathSource` will be
- // considered updated and won't actually read any packages.
+ // considered updated and we won't actually read any packages.
let registry = try!(SourceId::for_central(config));
let precise = Some("locked".to_string());
let new_src = try!(SourceId::for_path(&dst)).with_precise(precise);
/// Resolve all dependencies for the specified `package` using the previous
/// lockfile as a guide if present.
///
-/// This function will also generate a write the result of resolution as a new
+/// This function will also write the result of resolution as a new
/// lockfile.
pub fn resolve_pkg(registry: &mut PackageRegistry, package: &Package)
-> CargoResult<Resolve> {
// 2. The specified package's summary will have its dependencies
// modified to their precise variants. This will instruct the
// first step of the resolution process to not query for ranges
- // but rather precise dependency versions.
+ // but rather for precise dependency versions.
//
// This process must handle altered dependencies, however, as
// it's possible for a manifest to change over time to have
/// List all files relevant to building this package inside this source.
///
- /// This function will use the appropriate methods to determine what is the
+ /// This function will use the appropriate methods to determine the
/// set of files underneath this source's directory which are relevant for
/// building `pkg`.
///
// We check all packages in this source that are ancestors of the
// specified package (including the same package) to see if they're at
// the root of the git repository. This isn't always true, but it'll get
- // us there most of the time!.
+ // us there most of the time!
let repo = self.packages.iter()
.map(|pkg| pkg.root())
.filter(|path| root.starts_with(path))
let mut ret = Vec::new();
- // We use information from the git repository to guide use in traversing
+ // We use information from the git repository to guide us in traversing
// its tree. The primary purpose of this is to take advantage of the
// .gitignore and auto-ignore files that don't matter.
//
- // Here we're also careful to look at both tracked an untracked files as
+ // Here we're also careful to look at both tracked and untracked files as
// the untracked files are often part of a build and may become relevant
// as part of a future commit.
let index_files = index.iter().map(|entry| {